import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
import plotly.express as px
import os
%matplotlib inline
from matplotlib.pyplot import figure
from imblearn.over_sampling import SMOTE
from scipy.stats import mstats
from sklearn.impute import KNNImputer
from sklearn.metrics import roc_auc_score
from sklearn.metrics import classification_report
from sklearn.metrics import f1_score
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
from sklearn.model_selection import TimeSeriesSplit
from sklearn.tree import DecisionTreeRegressor
from sklearn.metrics import mean_squared_error, r2_score
from sklearn.ensemble import AdaBoostClassifier, BaggingClassifier
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import cross_val_score
from sklearn.preprocessing import StandardScaler
from imblearn.over_sampling import RandomOverSampler
from sklearn.metrics import classification_report, confusion_matrix
from sklearn.ensemble import RandomForestClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.linear_model import LogisticRegression
import warnings
warnings.filterwarnings('ignore')
df1 = pd.read_excel("/Users/shashankkv/Desktop/Sem -3/Joseph Artificial Intelligence/Final_dataset.xlsx")
df1.head()
| Age | Attrition | DailyRate | Department | DistanceFromHome | Education | EducationField | EmployeeCount | EmployeeNumber | EnvironmentSatisfaction | ... | RelationshipSatisfaction | StandardHours | StockOptionLevel | TotalWorkingYears | TrainingTimesLastYear | WorkLifeBalance | YearsAtCompany | YearsInCurrentRole | YearsSinceLastPromotion | YearsWithCurrManager | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 41 | Yes | 1102 | Sales | 1 | 2 | Life Sciences | 1 | 1 | 2 | ... | 1 | 80 | 0 | 8 | 0 | 1 | 6 | 4 | 0 | 5 |
| 1 | 49 | No | 279 | Research & Development | 8 | 1 | Life Sciences | 1 | 2 | 3 | ... | 4 | 80 | 1 | 10 | 3 | 3 | 10 | 7 | 1 | 7 |
| 2 | 37 | Yes | 1373 | Research & Development | 2 | 2 | Other | 1 | 4 | 4 | ... | 2 | 80 | 0 | 7 | 3 | 3 | 0 | 0 | 0 | 0 |
| 3 | 33 | No | 1392 | Research & Development | 3 | 4 | Life Sciences | 1 | 5 | 4 | ... | 3 | 80 | 0 | 8 | 3 | 3 | 8 | 7 | 3 | 0 |
| 4 | 27 | No | 591 | Research & Development | 2 | 1 | Medical | 1 | 7 | 1 | ... | 4 | 80 | 1 | 6 | 3 | 3 | 2 | 2 | 2 | 2 |
5 rows × 34 columns
#df1 = df1.drop('StandardHours', axis=1)
#df1 = df1.drop('EmployeeNumber', axis=1)
#df1 = df1.drop('EmployeeCount', axis=1)
df1.info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 1470 entries, 0 to 1469 Data columns (total 34 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 Age 1470 non-null int64 1 Attrition 1470 non-null object 2 DailyRate 1470 non-null int64 3 Department 1470 non-null object 4 DistanceFromHome 1470 non-null int64 5 Education 1470 non-null int64 6 EducationField 1470 non-null object 7 EmployeeCount 1470 non-null int64 8 EmployeeNumber 1470 non-null int64 9 EnvironmentSatisfaction 1470 non-null int64 10 Gender 1470 non-null object 11 HourlyRate 1470 non-null int64 12 JobInvolvement 1470 non-null int64 13 JobLevel 1470 non-null int64 14 JobRole 1470 non-null object 15 JobSatisfaction 1470 non-null int64 16 MaritalStatus 1470 non-null object 17 MonthlyIncome 1470 non-null int64 18 MonthlyRate 1470 non-null int64 19 NumCompaniesWorked 1470 non-null int64 20 Over18 1470 non-null object 21 OverTime 1470 non-null object 22 PercentSalaryHike 1470 non-null int64 23 PerformanceRating 1470 non-null int64 24 RelationshipSatisfaction 1470 non-null int64 25 StandardHours 1470 non-null int64 26 StockOptionLevel 1470 non-null int64 27 TotalWorkingYears 1470 non-null int64 28 TrainingTimesLastYear 1470 non-null int64 29 WorkLifeBalance 1470 non-null int64 30 YearsAtCompany 1470 non-null int64 31 YearsInCurrentRole 1470 non-null int64 32 YearsSinceLastPromotion 1470 non-null int64 33 YearsWithCurrManager 1470 non-null int64 dtypes: int64(26), object(8) memory usage: 390.6+ KB
df1.isnull().sum()
Age 0 Attrition 0 DailyRate 0 Department 0 DistanceFromHome 0 Education 0 EducationField 0 EmployeeCount 0 EmployeeNumber 0 EnvironmentSatisfaction 0 Gender 0 HourlyRate 0 JobInvolvement 0 JobLevel 0 JobRole 0 JobSatisfaction 0 MaritalStatus 0 MonthlyIncome 0 MonthlyRate 0 NumCompaniesWorked 0 Over18 0 OverTime 0 PercentSalaryHike 0 PerformanceRating 0 RelationshipSatisfaction 0 StandardHours 0 StockOptionLevel 0 TotalWorkingYears 0 TrainingTimesLastYear 0 WorkLifeBalance 0 YearsAtCompany 0 YearsInCurrentRole 0 YearsSinceLastPromotion 0 YearsWithCurrManager 0 dtype: int64
df1.describe().T
| count | mean | std | min | 25% | 50% | 75% | max | |
|---|---|---|---|---|---|---|---|---|
| Age | 1470.0 | 36.923810 | 9.135373 | 18.0 | 30.00 | 36.0 | 43.00 | 60.0 |
| DailyRate | 1470.0 | 802.485714 | 403.509100 | 102.0 | 465.00 | 802.0 | 1157.00 | 1499.0 |
| DistanceFromHome | 1470.0 | 9.192517 | 8.106864 | 1.0 | 2.00 | 7.0 | 14.00 | 29.0 |
| Education | 1470.0 | 2.912925 | 1.024165 | 1.0 | 2.00 | 3.0 | 4.00 | 5.0 |
| EmployeeCount | 1470.0 | 1.000000 | 0.000000 | 1.0 | 1.00 | 1.0 | 1.00 | 1.0 |
| EmployeeNumber | 1470.0 | 1024.865306 | 602.024335 | 1.0 | 491.25 | 1020.5 | 1555.75 | 2068.0 |
| EnvironmentSatisfaction | 1470.0 | 2.721769 | 1.093082 | 1.0 | 2.00 | 3.0 | 4.00 | 4.0 |
| HourlyRate | 1470.0 | 65.891156 | 20.329428 | 30.0 | 48.00 | 66.0 | 83.75 | 100.0 |
| JobInvolvement | 1470.0 | 2.729932 | 0.711561 | 1.0 | 2.00 | 3.0 | 3.00 | 4.0 |
| JobLevel | 1470.0 | 2.063946 | 1.106940 | 1.0 | 1.00 | 2.0 | 3.00 | 5.0 |
| JobSatisfaction | 1470.0 | 2.728571 | 1.102846 | 1.0 | 2.00 | 3.0 | 4.00 | 4.0 |
| MonthlyIncome | 1470.0 | 6502.931293 | 4707.956783 | 1009.0 | 2911.00 | 4919.0 | 8379.00 | 19999.0 |
| MonthlyRate | 1470.0 | 14313.103401 | 7117.786044 | 2094.0 | 8047.00 | 14235.5 | 20461.50 | 26999.0 |
| NumCompaniesWorked | 1470.0 | 2.693197 | 2.498009 | 0.0 | 1.00 | 2.0 | 4.00 | 9.0 |
| PercentSalaryHike | 1470.0 | 15.209524 | 3.659938 | 11.0 | 12.00 | 14.0 | 18.00 | 25.0 |
| PerformanceRating | 1470.0 | 3.153741 | 0.360824 | 3.0 | 3.00 | 3.0 | 3.00 | 4.0 |
| RelationshipSatisfaction | 1470.0 | 2.712245 | 1.081209 | 1.0 | 2.00 | 3.0 | 4.00 | 4.0 |
| StandardHours | 1470.0 | 80.000000 | 0.000000 | 80.0 | 80.00 | 80.0 | 80.00 | 80.0 |
| StockOptionLevel | 1470.0 | 0.793878 | 0.852077 | 0.0 | 0.00 | 1.0 | 1.00 | 3.0 |
| TotalWorkingYears | 1470.0 | 11.279592 | 7.780782 | 0.0 | 6.00 | 10.0 | 15.00 | 40.0 |
| TrainingTimesLastYear | 1470.0 | 2.799320 | 1.289271 | 0.0 | 2.00 | 3.0 | 3.00 | 6.0 |
| WorkLifeBalance | 1470.0 | 2.761224 | 0.706476 | 1.0 | 2.00 | 3.0 | 3.00 | 4.0 |
| YearsAtCompany | 1470.0 | 7.008163 | 6.126525 | 0.0 | 3.00 | 5.0 | 9.00 | 40.0 |
| YearsInCurrentRole | 1470.0 | 4.229252 | 3.623137 | 0.0 | 2.00 | 3.0 | 7.00 | 18.0 |
| YearsSinceLastPromotion | 1470.0 | 2.187755 | 3.222430 | 0.0 | 0.00 | 1.0 | 3.00 | 15.0 |
| YearsWithCurrManager | 1470.0 | 4.123129 | 3.568136 | 0.0 | 2.00 | 3.0 | 7.00 | 17.0 |
# Create a histogram of the age distribution
plt.hist(df1['Age'])
plt.title('Age Distribution')
plt.xlabel('Age')
plt.ylabel('Frequency')
plt.show()
# Create a bar plot for job role distribution using seaborn
sns.countplot(y='JobRole', data=df1)
plt.show()
# Create a pie chart of job satisfaction levels using plotly
fig = px.pie(df1, names='JobSatisfaction', title='Job Satisfaction levels')
fig.show()
# Plotting KDE for 'Age'
sns.kdeplot(data=df1, x='Age')
plt.show()
df1['Attrition'] = df1['Attrition'].map({'Yes': 1, 'No': 0})
# Identify data types of each column
data_types = df1.dtypes
# Separate categorical and numerical variables
categorical_vars = data_types[data_types == 'object'].index
numerical_vars = data_types[data_types != 'object'].index
print("Categorical Variables:", categorical_vars)
print("Numerical Variables:", numerical_vars)
Categorical Variables: Index(['Department', 'EducationField', 'Gender', 'JobRole', 'MaritalStatus',
'Over18', 'OverTime'],
dtype='object')
Numerical Variables: Index(['Age', 'Attrition', 'DailyRate', 'DistanceFromHome', 'Education',
'EmployeeCount', 'EmployeeNumber', 'EnvironmentSatisfaction',
'HourlyRate', 'JobInvolvement', 'JobLevel', 'JobSatisfaction',
'MonthlyIncome', 'MonthlyRate', 'NumCompaniesWorked',
'PercentSalaryHike', 'PerformanceRating', 'RelationshipSatisfaction',
'StandardHours', 'StockOptionLevel', 'TotalWorkingYears',
'TrainingTimesLastYear', 'WorkLifeBalance', 'YearsAtCompany',
'YearsInCurrentRole', 'YearsSinceLastPromotion',
'YearsWithCurrManager'],
dtype='object')
# Remove categorical columns from the dataset
df = df1.drop(categorical_vars, axis=1)
df
| Age | Attrition | DailyRate | DistanceFromHome | Education | EmployeeCount | EmployeeNumber | EnvironmentSatisfaction | HourlyRate | JobInvolvement | ... | RelationshipSatisfaction | StandardHours | StockOptionLevel | TotalWorkingYears | TrainingTimesLastYear | WorkLifeBalance | YearsAtCompany | YearsInCurrentRole | YearsSinceLastPromotion | YearsWithCurrManager | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 41 | 1 | 1102 | 1 | 2 | 1 | 1 | 2 | 94 | 3 | ... | 1 | 80 | 0 | 8 | 0 | 1 | 6 | 4 | 0 | 5 |
| 1 | 49 | 0 | 279 | 8 | 1 | 1 | 2 | 3 | 61 | 2 | ... | 4 | 80 | 1 | 10 | 3 | 3 | 10 | 7 | 1 | 7 |
| 2 | 37 | 1 | 1373 | 2 | 2 | 1 | 4 | 4 | 92 | 2 | ... | 2 | 80 | 0 | 7 | 3 | 3 | 0 | 0 | 0 | 0 |
| 3 | 33 | 0 | 1392 | 3 | 4 | 1 | 5 | 4 | 56 | 3 | ... | 3 | 80 | 0 | 8 | 3 | 3 | 8 | 7 | 3 | 0 |
| 4 | 27 | 0 | 591 | 2 | 1 | 1 | 7 | 1 | 40 | 3 | ... | 4 | 80 | 1 | 6 | 3 | 3 | 2 | 2 | 2 | 2 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| 1465 | 36 | 0 | 884 | 23 | 2 | 1 | 2061 | 3 | 41 | 4 | ... | 3 | 80 | 1 | 17 | 3 | 3 | 5 | 2 | 0 | 3 |
| 1466 | 39 | 0 | 613 | 6 | 1 | 1 | 2062 | 4 | 42 | 2 | ... | 1 | 80 | 1 | 9 | 5 | 3 | 7 | 7 | 1 | 7 |
| 1467 | 27 | 0 | 155 | 4 | 3 | 1 | 2064 | 2 | 87 | 4 | ... | 2 | 80 | 1 | 6 | 0 | 3 | 6 | 2 | 0 | 3 |
| 1468 | 49 | 0 | 1023 | 2 | 3 | 1 | 2065 | 4 | 63 | 2 | ... | 4 | 80 | 0 | 17 | 3 | 2 | 9 | 6 | 0 | 8 |
| 1469 | 34 | 0 | 628 | 8 | 3 | 1 | 2068 | 2 | 82 | 4 | ... | 1 | 80 | 0 | 6 | 3 | 4 | 4 | 3 | 1 | 2 |
1470 rows × 27 columns
target_counts = df['Attrition'].value_counts()
# Calculate the balance
balance = target_counts[1] / target_counts[0]
print("Balance:", balance)
Balance: 0.1922141119221411
df['Attrition'].value_counts(normalize = True)*100
0 83.877551 1 16.122449 Name: Attrition, dtype: float64
n_c = ['Age', 'Attrition', 'DailyRate', 'DistanceFromHome', 'Education',
'EnvironmentSatisfaction', 'HourlyRate',
'JobInvolvement', 'JobLevel', 'JobSatisfaction', 'MonthlyIncome',
'MonthlyRate', 'NumCompaniesWorked', 'PercentSalaryHike',
'PerformanceRating', 'RelationshipSatisfaction', 'StockOptionLevel',
'TotalWorkingYears', 'TrainingTimesLastYear', 'WorkLifeBalance',
'YearsAtCompany', 'YearsInCurrentRole', 'YearsSinceLastPromotion']
# Calculate the IQR for each numeric column
q1 = df[n_c].quantile(0.25)
q3 = df[n_c].quantile(0.75)
iqr = q3 - q1
# Define the threshold for outlier detection
threshold = 1.5
# Identify outliers
outliers = ((df[n_c] < (q1 - threshold * iqr)) | (df[n_c] > (q3 + threshold * iqr))).any(axis=1)
# Printing the indices of the outlier rows
print("Outlier indices:")
print(df[outliers].index)
Outlier indices:
Int64Index([ 0, 1, 2, 4, 6, 7, 8, 10, 14, 15,
...
1446, 1447, 1448, 1452, 1457, 1458, 1461, 1462, 1466, 1467],
dtype='int64', length=827)
plt.figure(figsize=(12, 6))
sns.boxplot(data=df[n_c])
plt.xticks(rotation=45)
plt.title("Box Plots of Numeric Columns")
plt.xlabel("Columns")
plt.ylabel("Values")
plt.show()
# Winsorize the numeric columns
df_winsorized = pd.DataFrame()
for col in n_c:
df_winsorized[col] = mstats.winsorize(df[col], limits=[0.05, 0.05])
# Print the winsorized DataFrame
print(df_winsorized)
Age Attrition DailyRate DistanceFromHome Education \
0 41 1 1102 1 2
1 49 0 279 8 1
2 37 1 1373 2 2
3 33 0 1392 3 4
4 27 0 591 2 1
... ... ... ... ... ...
1465 36 0 884 23 2
1466 39 0 613 6 1
1467 27 0 164 4 3
1468 49 0 1023 2 3
1469 34 0 628 8 3
EnvironmentSatisfaction HourlyRate JobInvolvement JobLevel \
0 2 94 3 2
1 3 61 2 2
2 4 92 2 1
3 4 56 3 1
4 1 40 3 1
... ... ... ... ...
1465 3 41 4 2
1466 4 42 2 3
1467 2 87 4 2
1468 4 63 2 2
1469 2 82 4 2
JobSatisfaction ... PercentSalaryHike PerformanceRating \
0 4 ... 11 3
1 2 ... 22 4
2 3 ... 15 3
3 3 ... 11 3
4 2 ... 12 3
... ... ... ... ...
1465 4 ... 17 3
1466 1 ... 15 3
1467 2 ... 20 4
1468 2 ... 14 3
1469 3 ... 12 3
RelationshipSatisfaction StockOptionLevel TotalWorkingYears \
0 1 0 8
1 4 1 10
2 2 0 7
3 3 0 8
4 4 1 6
... ... ... ...
1465 3 1 17
1466 1 1 9
1467 2 1 6
1468 4 0 17
1469 1 0 6
TrainingTimesLastYear WorkLifeBalance YearsAtCompany \
0 1 1 6
1 3 3 10
2 3 3 1
3 3 3 8
4 3 3 2
... ... ... ...
1465 3 3 5
1466 5 3 7
1467 1 3 6
1468 3 2 9
1469 3 4 4
YearsInCurrentRole YearsSinceLastPromotion
0 4 0
1 7 1
2 0 0
3 7 3
4 2 2
... ... ...
1465 2 0
1466 7 1
1467 2 0
1468 6 0
1469 3 1
[1470 rows x 23 columns]
df_winsorized.head()
| Age | Attrition | DailyRate | DistanceFromHome | Education | EnvironmentSatisfaction | HourlyRate | JobInvolvement | JobLevel | JobSatisfaction | ... | PercentSalaryHike | PerformanceRating | RelationshipSatisfaction | StockOptionLevel | TotalWorkingYears | TrainingTimesLastYear | WorkLifeBalance | YearsAtCompany | YearsInCurrentRole | YearsSinceLastPromotion | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 41 | 1 | 1102 | 1 | 2 | 2 | 94 | 3 | 2 | 4 | ... | 11 | 3 | 1 | 0 | 8 | 1 | 1 | 6 | 4 | 0 |
| 1 | 49 | 0 | 279 | 8 | 1 | 3 | 61 | 2 | 2 | 2 | ... | 22 | 4 | 4 | 1 | 10 | 3 | 3 | 10 | 7 | 1 |
| 2 | 37 | 1 | 1373 | 2 | 2 | 4 | 92 | 2 | 1 | 3 | ... | 15 | 3 | 2 | 0 | 7 | 3 | 3 | 1 | 0 | 0 |
| 3 | 33 | 0 | 1392 | 3 | 4 | 4 | 56 | 3 | 1 | 3 | ... | 11 | 3 | 3 | 0 | 8 | 3 | 3 | 8 | 7 | 3 |
| 4 | 27 | 0 | 591 | 2 | 1 | 1 | 40 | 3 | 1 | 2 | ... | 12 | 3 | 4 | 1 | 6 | 3 | 3 | 2 | 2 | 2 |
5 rows × 23 columns
df_winsorized.isnull().sum()
Age 0 Attrition 0 DailyRate 0 DistanceFromHome 0 Education 0 EnvironmentSatisfaction 0 HourlyRate 0 JobInvolvement 0 JobLevel 0 JobSatisfaction 0 MonthlyIncome 0 MonthlyRate 0 NumCompaniesWorked 0 PercentSalaryHike 0 PerformanceRating 0 RelationshipSatisfaction 0 StockOptionLevel 0 TotalWorkingYears 0 TrainingTimesLastYear 0 WorkLifeBalance 0 YearsAtCompany 0 YearsInCurrentRole 0 YearsSinceLastPromotion 0 dtype: int64
correlation_matrix = df_winsorized.corr()
print(correlation_matrix)
Age Attrition DailyRate DistanceFromHome \
Age 1.000000 -0.152663 0.007397 -0.005364
Attrition -0.152663 1.000000 -0.056929 0.080134
DailyRate 0.007397 -0.056929 1.000000 -0.005119
DistanceFromHome -0.005364 0.080134 -0.005119 1.000000
Education 0.204215 -0.027852 -0.016675 0.016886
EnvironmentSatisfaction 0.011997 -0.103369 0.017921 -0.014987
HourlyRate 0.026907 -0.008387 0.024743 0.030818
JobInvolvement 0.032094 -0.130016 0.046121 0.011380
JobLevel 0.514133 -0.176362 0.001585 0.020980
JobSatisfaction -0.004015 -0.103481 0.030066 -0.003278
MonthlyIncome 0.501686 -0.160078 0.007870 -0.015138
MonthlyRate 0.033064 0.014206 -0.033727 0.029724
NumCompaniesWorked 0.303254 0.042234 0.037608 -0.028965
PercentSalaryHike 0.003674 -0.014745 0.024835 0.037484
PerformanceRating 0.000116 0.002889 -0.000441 0.026738
RelationshipSatisfaction 0.051785 -0.045872 0.007203 0.007852
StockOptionLevel 0.028440 -0.137145 0.041204 0.042582
TotalWorkingYears 0.679590 -0.180678 0.022683 0.001943
TrainingTimesLastYear -0.011059 -0.050672 -0.006538 -0.044132
WorkLifeBalance -0.014132 -0.063939 -0.037184 -0.028032
YearsAtCompany 0.282408 -0.154814 -0.023541 0.012075
YearsInCurrentRole 0.198013 -0.167308 0.009226 0.023741
YearsSinceLastPromotion 0.196157 -0.036628 -0.036826 0.005730
Education EnvironmentSatisfaction HourlyRate \
Age 0.204215 0.011997 0.026907
Attrition -0.027852 -0.103369 -0.008387
DailyRate -0.016675 0.017921 0.024743
DistanceFromHome 0.016886 -0.014987 0.030818
Education 1.000000 -0.026258 0.013901
EnvironmentSatisfaction -0.026258 1.000000 -0.049018
HourlyRate 0.013901 -0.049018 1.000000
JobInvolvement 0.039844 -0.008278 0.043433
JobLevel 0.099765 -0.000028 -0.029328
JobSatisfaction -0.010019 -0.006784 -0.072018
MonthlyIncome 0.087826 -0.007065 -0.015663
MonthlyRate -0.027015 0.036286 -0.015349
NumCompaniesWorked 0.129660 0.012396 0.024992
PercentSalaryHike -0.012706 -0.030995 -0.008766
PerformanceRating -0.029011 -0.029548 -0.001716
RelationshipSatisfaction -0.006898 0.007665 0.001567
StockOptionLevel 0.018683 0.003432 0.050529
TotalWorkingYears 0.141106 -0.009936 -0.000307
TrainingTimesLastYear -0.027299 -0.018985 -0.001249
WorkLifeBalance 0.007909 0.027627 -0.004828
YearsAtCompany 0.058388 0.001144 -0.023638
YearsInCurrentRole 0.060030 0.027555 -0.028415
YearsSinceLastPromotion 0.052051 0.018618 -0.035135
JobInvolvement JobLevel JobSatisfaction ... \
Age 0.032094 0.514133 -0.004015 ...
Attrition -0.130016 -0.176362 -0.103481 ...
DailyRate 0.046121 0.001585 0.030066 ...
DistanceFromHome 0.011380 0.020980 -0.003278 ...
Education 0.039844 0.099765 -0.010019 ...
EnvironmentSatisfaction -0.008278 -0.000028 -0.006784 ...
HourlyRate 0.043433 -0.029328 -0.072018 ...
JobInvolvement 1.000000 -0.009833 -0.021476 ...
JobLevel -0.009833 1.000000 -0.002609 ...
JobSatisfaction -0.021476 -0.002609 1.000000 ...
MonthlyIncome -0.014740 0.934033 -0.006963 ...
MonthlyRate -0.017216 0.046051 0.000932 ...
NumCompaniesWorked 0.013932 0.147778 -0.057410 ...
PercentSalaryHike -0.017444 -0.031289 0.018657 ...
PerformanceRating -0.029071 -0.016732 0.002297 ...
RelationshipSatisfaction 0.034297 0.019065 -0.012454 ...
StockOptionLevel 0.021523 0.018551 0.010690 ...
TotalWorkingYears -0.005762 0.783599 -0.019811 ...
TrainingTimesLastYear -0.005673 -0.027308 -0.004992 ...
WorkLifeBalance -0.014617 0.036691 -0.019459 ...
YearsAtCompany -0.010827 0.524932 -0.000600 ...
YearsInCurrentRole 0.008501 0.395553 0.001805 ...
YearsSinceLastPromotion -0.016993 0.335968 -0.011866 ...
PercentSalaryHike PerformanceRating \
Age 0.003674 0.000116
Attrition -0.014745 0.002889
DailyRate 0.024835 -0.000441
DistanceFromHome 0.037484 0.026738
Education -0.012706 -0.029011
EnvironmentSatisfaction -0.030995 -0.029548
HourlyRate -0.008766 -0.001716
JobInvolvement -0.017444 -0.029071
JobLevel -0.031289 -0.016732
JobSatisfaction 0.018657 0.002297
MonthlyIncome -0.029931 -0.016073
MonthlyRate -0.004338 -0.011236
NumCompaniesWorked -0.003577 -0.012245
PercentSalaryHike 1.000000 0.758391
PerformanceRating 0.758391 1.000000
RelationshipSatisfaction -0.039903 -0.031351
StockOptionLevel 0.008727 0.003506
TotalWorkingYears -0.027570 0.004392
TrainingTimesLastYear -0.007125 -0.013402
WorkLifeBalance -0.005013 0.002572
YearsAtCompany -0.040809 0.009783
YearsInCurrentRole -0.007573 0.033691
YearsSinceLastPromotion -0.040823 0.013595
RelationshipSatisfaction StockOptionLevel \
Age 0.051785 0.028440
Attrition -0.045872 -0.137145
DailyRate 0.007203 0.041204
DistanceFromHome 0.007852 0.042582
Education -0.006898 0.018683
EnvironmentSatisfaction 0.007665 0.003432
HourlyRate 0.001567 0.050529
JobInvolvement 0.034297 0.021523
JobLevel 0.019065 0.018551
JobSatisfaction -0.012454 0.010690
MonthlyIncome 0.026476 0.004912
MonthlyRate -0.002819 -0.035770
NumCompaniesWorked 0.052751 0.028898
PercentSalaryHike -0.039903 0.008727
PerformanceRating -0.031351 0.003506
RelationshipSatisfaction 1.000000 -0.045952
StockOptionLevel -0.045952 1.000000
TotalWorkingYears 0.022837 0.014782
TrainingTimesLastYear 0.006717 0.009222
WorkLifeBalance 0.019604 0.004129
YearsAtCompany 0.011535 0.021939
YearsInCurrentRole -0.020054 0.056152
YearsSinceLastPromotion 0.020279 0.016492
TotalWorkingYears TrainingTimesLastYear \
Age 0.679590 -0.011059
Attrition -0.180678 -0.050672
DailyRate 0.022683 -0.006538
DistanceFromHome 0.001943 -0.044132
Education 0.141106 -0.027299
EnvironmentSatisfaction -0.009936 -0.018985
HourlyRate -0.000307 -0.001249
JobInvolvement -0.005762 -0.005673
JobLevel 0.783599 -0.027308
JobSatisfaction -0.019811 -0.004992
MonthlyIncome 0.774523 -0.024360
MonthlyRate 0.021058 -0.003453
NumCompaniesWorked 0.251909 -0.065646
PercentSalaryHike -0.027570 -0.007125
PerformanceRating 0.004392 -0.013402
RelationshipSatisfaction 0.022837 0.006717
StockOptionLevel 0.014782 0.009222
TotalWorkingYears 1.000000 -0.032040
TrainingTimesLastYear -0.032040 1.000000
WorkLifeBalance 0.004029 0.024036
YearsAtCompany 0.607889 0.003478
YearsInCurrentRole 0.456291 -0.002654
YearsSinceLastPromotion 0.376024 0.003595
WorkLifeBalance YearsAtCompany YearsInCurrentRole \
Age -0.014132 0.282408 0.198013
Attrition -0.063939 -0.154814 -0.167308
DailyRate -0.037184 -0.023541 0.009226
DistanceFromHome -0.028032 0.012075 0.023741
Education 0.007909 0.058388 0.060030
EnvironmentSatisfaction 0.027627 0.001144 0.027555
HourlyRate -0.004828 -0.023638 -0.028415
JobInvolvement -0.014617 -0.010827 0.008501
JobLevel 0.036691 0.524932 0.395553
JobSatisfaction -0.019459 -0.000600 0.001805
MonthlyIncome 0.030743 0.494547 0.348851
MonthlyRate 0.007068 -0.031016 -0.009285
NumCompaniesWorked -0.007625 -0.120393 -0.103091
PercentSalaryHike -0.005013 -0.040809 -0.007573
PerformanceRating 0.002572 0.009783 0.033691
RelationshipSatisfaction 0.019604 0.011535 -0.020054
StockOptionLevel 0.004129 0.021939 0.056152
TotalWorkingYears 0.004029 0.607889 0.456291
TrainingTimesLastYear 0.024036 0.003478 -0.002654
WorkLifeBalance 1.000000 0.013949 0.042241
YearsAtCompany 0.013949 1.000000 0.805600
YearsInCurrentRole 0.042241 0.805600 1.000000
YearsSinceLastPromotion 0.008146 0.604095 0.550674
YearsSinceLastPromotion
Age 0.196157
Attrition -0.036628
DailyRate -0.036826
DistanceFromHome 0.005730
Education 0.052051
EnvironmentSatisfaction 0.018618
HourlyRate -0.035135
JobInvolvement -0.016993
JobLevel 0.335968
JobSatisfaction -0.011866
MonthlyIncome 0.317722
MonthlyRate -0.006008
NumCompaniesWorked -0.049714
PercentSalaryHike -0.040823
PerformanceRating 0.013595
RelationshipSatisfaction 0.020279
StockOptionLevel 0.016492
TotalWorkingYears 0.376024
TrainingTimesLastYear 0.003595
WorkLifeBalance 0.008146
YearsAtCompany 0.604095
YearsInCurrentRole 0.550674
YearsSinceLastPromotion 1.000000
[23 rows x 23 columns]
plt.figure(figsize=(25, 10))
sns.heatmap(correlation_matrix, annot=True, cmap='coolwarm')
plt.title('Correlation Matrix')
plt.show()
X = df_winsorized.drop(columns=["Attrition"])
y = df_winsorized['Attrition']
# Split the data into training and testing sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
# Print the shape of the training and testing sets
print("X_train shape:", X_train.shape)
print("X_test shape:", X_test.shape)
print("y_train shape:", y_train.shape)
print("y_test shape:", y_test.shape)
X_train shape: (1176, 22) X_test shape: (294, 22) y_train shape: (1176,) y_test shape: (294,)
print("Before OverSampling, counts of label '1':",np.sum(y == 1))
print("Before OverSampling, counts of label '0':",np.sum(y == 0))
sm = SMOTE(random_state = 42)
X_res, y_res = sm.fit_resample(X_train, y_train)
print('After OverSampling, the shape of train_X: {}'.format(X_res.shape))
print('After OverSampling, the shape of train_y: {} \n'.format(y_res.shape))
print("After OverSampling, counts of label '1': {}".format(sum(y_res == 1)))
print("After OverSampling, counts of label '0': {}".format(sum(y_res == 0)))
Before OverSampling, counts of label '1': 237 Before OverSampling, counts of label '0': 1233 After OverSampling, the shape of train_X: (1956, 22) After OverSampling, the shape of train_y: (1956,) After OverSampling, counts of label '1': 978 After OverSampling, counts of label '0': 978
X_train1, X_test1, y_train1, y_test1 = train_test_split(X_res,y_res,test_size=0.3,random_state=15)
lr1 = LogisticRegression()
lr1.fit(X_train1, y_train1)
trainpred2 = lr1.predict(X_train1)
print(classification_report(y_train1, trainpred2))
precision recall f1-score support
0 0.71 0.65 0.68 691
1 0.67 0.73 0.70 678
accuracy 0.69 1369
macro avg 0.69 0.69 0.69 1369
weighted avg 0.69 0.69 0.69 1369
conf_mat = confusion_matrix(y_train1, trainpred2)
sns.heatmap(conf_mat, annot=True, fmt='d', cbar=False)
plt.xlabel('Predicted Values')
plt.ylabel('Actual Values')
plt.title('Actual vs. Predicted Confusion Matrix')
plt.show()
a = confusion_matrix(y_train1, trainpred2)
TN = a[0,0]
FP = a[0,1]
FN = a[1,0]
TP = a[1,1]
precision = TP/float(TP+FP)
Recall = TP/float(TP+FN)
print("Precision:", precision)
print("Recall:",Recall)
print("roc_auc_Score:",roc_auc_score(y_train1, trainpred2) )
print("f1_score:",f1_score(y_train1, trainpred2) )
print("accuracy_score:",accuracy_score(y_train1, trainpred2))
x = (precision, Recall, roc_auc_score(y_train1, trainpred2), f1_score(y_train1, trainpred2) )
Precision: 0.6734972677595629 Recall: 0.7271386430678466 roc_auc_Score: 0.6906315501880478 f1_score: 0.699290780141844 accuracy_score: 0.6902848794740687
predictions1 = lr1.predict(X_test1)
print(classification_report(y_test1, predictions1))
precision recall f1-score support
0 0.73 0.66 0.69 287
1 0.70 0.77 0.73 300
accuracy 0.72 587
macro avg 0.72 0.71 0.71 587
weighted avg 0.72 0.72 0.71 587
conf_mat = confusion_matrix(y_test1, predictions1)
sns.heatmap(conf_mat, annot=True, fmt='d', cbar=False)
plt.xlabel('Predicted Values')
plt.ylabel('Actual Values')
plt.title('Actual vs. Predicted Confusion Matrix')
b, t = plt.ylim()
b += 0.5
t -= 0.5
plt.ylim(b, t)
plt.show()
a = confusion_matrix(y_test1, predictions1)
TN = a[0,0]
FP = a[0,1]
FN = a[1,0]
TP = a[1,1]
precision = TP/float(TP+FP)
Recall = TP/float(TP+FN)
print("Precision:", precision)
print("Recall:",Recall)
print("roc_auc_Score:",roc_auc_score(y_test1, predictions1) )
print("f1_score:",f1_score(y_test1, predictions1) )
print("accuracy_score:",accuracy_score(y_test1, predictions1))
Precision: 0.7021276595744681 Recall: 0.77 roc_auc_Score: 0.7142682926829268 f1_score: 0.7344992050874404 accuracy_score: 0.7155025553662692
giniDecisionTree2 = DecisionTreeClassifier(criterion='gini')
giniDecisionTree2.fit(X_train1, y_train1)
DecisionTreeClassifier()In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
DecisionTreeClassifier()
trainpred3 = giniDecisionTree2.predict(X_train1)
print(classification_report(y_train1, trainpred3))
precision recall f1-score support
0 1.00 1.00 1.00 691
1 1.00 1.00 1.00 678
accuracy 1.00 1369
macro avg 1.00 1.00 1.00 1369
weighted avg 1.00 1.00 1.00 1369
conf_mat = confusion_matrix(y_train1, trainpred3)
sns.heatmap(conf_mat, annot=True, fmt='d', cbar=False)
plt.xlabel('Predicted Values')
plt.ylabel('Actual Values')
plt.title('Actual vs. Predicted Confusion Matrix')
b, t = plt.ylim()
b += 0.5
t -= 0.5
plt.ylim(b, t)
plt.show()
a = confusion_matrix(y_train1, trainpred3)
TN = a[0,0]
FP = a[0,1]
FN = a[1,0]
TP = a[1,1]
precision = TP/float(TP+FP)
Recall = TP/float(TP+FN)
print("Precision:", precision)
print("Recall:",Recall)
print("roc_auc_Score:",roc_auc_score(y_train1, trainpred3) )
print("f1_score:",f1_score(y_train1, trainpred3) )
print("accuracy_score:",accuracy_score(y_train1, trainpred3))
Precision: 1.0 Recall: 1.0 roc_auc_Score: 1.0 f1_score: 1.0 accuracy_score: 1.0
giniPred2 = giniDecisionTree2.predict(X_test1)
print(classification_report(y_test1, giniPred2))
precision recall f1-score support
0 0.84 0.74 0.78 287
1 0.77 0.86 0.82 300
accuracy 0.80 587
macro avg 0.81 0.80 0.80 587
weighted avg 0.80 0.80 0.80 587
conf_mat = confusion_matrix(y_test1, giniPred2)
sns.heatmap(conf_mat, annot=True, fmt='d', cbar=False)
plt.xlabel('Predicted Values')
plt.ylabel('Actual Values')
plt.title('Actual vs. Predicted Confusion Matrix')
b, t = plt.ylim()
b += 0.5
t -= 0.5
plt.ylim(b, t)
plt.show()
a = confusion_matrix(y_test1, giniPred2)
TN = a[0,0]
FP = a[0,1]
FN = a[1,0]
TP = a[1,1]
precision = TP/float(TP+FP)
Recall = TP/float(TP+FN)
print("Precision:", precision)
print("Recall:",Recall)
print("roc_auc_Score:",roc_auc_score(y_test1, giniPred2) )
print("f1_score:",f1_score(y_test1, giniPred2) )
print("accuracy_score:",accuracy_score(y_test1, giniPred2))
Precision: 0.7731343283582089 Recall: 0.8633333333333333 roc_auc_Score: 0.7992624854819975 f1_score: 0.8157480314960631 accuracy_score: 0.8006814310051107
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X=pd.DataFrame(sc.fit_transform(X),columns=X.columns)
print("Before OverSampling, counts of label '1':",np.sum(y == 1))
print("Before OverSampling, counts of label '0':",np.sum(y == 0))
# import SMOTE module from imblearn library
# pip install imblearn (if you don't have imblearn in your system)
from imblearn.over_sampling import SMOTE
smt = SMOTE(random_state = 2)
X_res, y_res = smt.fit_resample(X, y)
print('After OverSampling, the shape of train_X: {}'.format(X_res.shape))
print('After OverSampling, the shape of train_y: {} \n'.format(y_res.shape))
print("After OverSampling, counts of label '1': {}".format(sum(y_res == 1)))
print("After OverSampling, counts of label '0': {}".format(sum(y_res == 0)))
Before OverSampling, counts of label '1': 237 Before OverSampling, counts of label '0': 1233 After OverSampling, the shape of train_X: (2466, 22) After OverSampling, the shape of train_y: (2466,) After OverSampling, counts of label '1': 1233 After OverSampling, counts of label '0': 1233
X_train1, X_test1, y_train1, y_test1 = train_test_split(X_res,y_res,test_size=0.3,random_state=15)
lr1 = LogisticRegression(max_iter = 100000)
lr1.fit(X_train1, y_train1)
trainpred2 = lr1.predict(X_train1)
# print classification report
print(classification_report(y_train1, trainpred2))
precision recall f1-score support
0 0.74 0.71 0.72 863
1 0.72 0.75 0.73 863
accuracy 0.73 1726
macro avg 0.73 0.73 0.73 1726
weighted avg 0.73 0.73 0.73 1726
conf_mat = confusion_matrix(y_train1, trainpred2)
sns.heatmap(conf_mat, annot=True, fmt='d', cbar=False)
plt.xlabel('Predicted Values')
plt.ylabel('Actual Values')
plt.title('Actual vs. Predicted Confusion Matrix')
b, t = plt.ylim()
b += 0.5
t -= 0.5
plt.ylim(b, t)
plt.show()
a = confusion_matrix(y_train1, trainpred2)
TN = a[0,0]
FP = a[0,1]
FN = a[1,0]
TP = a[1,1]
precision = TP/float(TP+FP)
Recall = TP/float(TP+FN)
print("Precision:", precision)
print("Recall:",Recall)
print("roc_auc_Score:",roc_auc_score(y_train1, trainpred2) )
print("f1_score:",f1_score(y_train1, trainpred2) )
print("accuracy_score:",accuracy_score(y_train1, trainpred2))
Precision: 0.7187153931339978 Recall: 0.7520278099652375 roc_auc_Score: 0.7288528389339514 f1_score: 0.7349943374858436 accuracy_score: 0.7288528389339514
predictions1 = lr1.predict(X_test1)
# print classification report
print(classification_report(y_test1, predictions1))
precision recall f1-score support
0 0.73 0.70 0.71 370
1 0.71 0.74 0.72 370
accuracy 0.72 740
macro avg 0.72 0.72 0.72 740
weighted avg 0.72 0.72 0.72 740
conf_mat = confusion_matrix(y_test1, predictions1)
sns.heatmap(conf_mat, annot=True, fmt='d', cbar=False)
plt.xlabel('Predicted Values')
plt.ylabel('Actual Values')
plt.title('Actual vs. Predicted Confusion Matrix')
b, t = plt.ylim()
b += 0.5
t -= 0.5
plt.ylim(b, t)
plt.show()
a = confusion_matrix(y_test1, predictions1)
TN = a[0,0]
FP = a[0,1]
FN = a[1,0]
TP = a[1,1]
precision = TP/float(TP+FP)
Recall = TP/float(TP+FN)
print("Precision:", precision)
print("Recall:",Recall)
print("roc_auc_Score:",roc_auc_score(y_test1, predictions1) )
print("f1_score:",f1_score(y_test1, predictions1) )
print("accuracy_score:",accuracy_score(y_test1, predictions1))
Precision: 0.7098445595854922 Recall: 0.7405405405405405 roc_auc_Score: 0.7189189189189189 f1_score: 0.7248677248677249 accuracy_score: 0.7189189189189189
from sklearn import model_selection
kf=model_selection.KFold(n_splits=5,shuffle=True,random_state=0)
values=model_selection.cross_val_score(lr1,X_res, y_res,cv=kf,scoring='roc_auc')
print(values)
b_e=1-np.mean(values)
v_e=np.std(values,ddof=1)
print('Bias_error:',b_e)
print('VE:',v_e)
print("Accuracy: %0.2f (+/- %0.2f)" % (values.mean(), values.std() * 2))
[0.79036426 0.81985282 0.77215127 0.79231681 0.77022926] Bias_error: 0.21101711569795278 VE: 0.02000611023202749 Accuracy: 0.79 (+/- 0.04)
giniDecisionTree2 = DecisionTreeClassifier(criterion='gini')
giniDecisionTree2.fit(X_train1, y_train1)
DecisionTreeClassifier()In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
DecisionTreeClassifier()
trainpred3 = giniDecisionTree2.predict(X_train1)
print(classification_report(y_train1, trainpred3))
precision recall f1-score support
0 1.00 1.00 1.00 863
1 1.00 1.00 1.00 863
accuracy 1.00 1726
macro avg 1.00 1.00 1.00 1726
weighted avg 1.00 1.00 1.00 1726
conf_mat = confusion_matrix(y_train1, trainpred3)
sns.heatmap(conf_mat, annot=True, fmt='d', cbar=False)
plt.xlabel('Predicted Values')
plt.ylabel('Actual Values')
plt.title('Actual vs. Predicted Confusion Matrix')
b, t = plt.ylim()
b += 0.5
t -= 0.5
plt.ylim(b, t)
plt.show()
a = confusion_matrix(y_train1, trainpred3)
TN = a[0,0]
FP = a[0,1]
FN = a[1,0]
TP = a[1,1]
precision = TP/float(TP+FP)
Recall = TP/float(TP+FN)
print("Precision:", precision)
print("Recall:",Recall)
print("roc_auc_Score:",roc_auc_score(y_train1, trainpred3) )
print("f1_score:",f1_score(y_train1, trainpred3) )
print("accuracy_score:",accuracy_score(y_train1, trainpred3))
Precision: 1.0 Recall: 1.0 roc_auc_Score: 1.0 f1_score: 1.0 accuracy_score: 1.0
giniPred2 = giniDecisionTree2.predict(X_test1)
print(classification_report(y_test1, giniPred2))
precision recall f1-score support
0 0.82 0.79 0.81 370
1 0.80 0.83 0.81 370
accuracy 0.81 740
macro avg 0.81 0.81 0.81 740
weighted avg 0.81 0.81 0.81 740
conf_mat = confusion_matrix(y_test1, giniPred2)
sns.heatmap(conf_mat, annot=True, fmt='d', cbar=False)
plt.xlabel('Predicted Values')
plt.ylabel('Actual Values')
plt.title('Actual vs. Predicted Confusion Matrix')
b, t = plt.ylim()
b += 0.5
t -= 0.5
plt.ylim(b, t)
plt.show()
a = confusion_matrix(y_test1, giniPred2)
TN = a[0,0]
FP = a[0,1]
FN = a[1,0]
TP = a[1,1]
precision = TP/float(TP+FP)
Recall = TP/float(TP+FN)
print("Precision:", precision)
print("Recall:",Recall)
print("roc_auc_Score:",roc_auc_score(y_test1, giniPred2) )
print("f1_score:",f1_score(y_test1, giniPred2) )
print("accuracy_score:",accuracy_score(y_test1, giniPred2))
Precision: 0.7994791666666666 Recall: 0.8297297297297297 roc_auc_Score: 0.8108108108108107 f1_score: 0.8143236074270556 accuracy_score: 0.8108108108108109
from sklearn import model_selection
kf=model_selection.KFold(n_splits=5,shuffle=True,random_state=0)
values=model_selection.cross_val_score(giniDecisionTree2,X_res, y_res,cv=kf,scoring='roc_auc')
print(values)
b_e=1-np.mean(values)
v_e=np.std(values,ddof=1)
print('Bias_error:',b_e)
print('VE:',v_e)
print("Accuracy: %0.2f (+/- %0.2f)" % (values.mean(), values.std() * 2))
[0.83541554 0.88666139 0.84413259 0.83586806 0.83448788] Bias_error: 0.15268690889373038 VE: 0.022335121198477054 Accuracy: 0.85 (+/- 0.04)
rf = RandomForestClassifier(random_state=42)
rf.fit(X_train1,y_train1)
RandomForestClassifier(random_state=42)In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
RandomForestClassifier(random_state=42)
rf.score(X_train1,y_train1)
1.0
pred_rf=rf.predict(X_test1)
rf.predict_proba(X_test1)
array([[0.15, 0.85],
[0.12, 0.88],
[0.37, 0.63],
...,
[0.41, 0.59],
[0.83, 0.17],
[0.83, 0.17]])
print(classification_report(y_test1,pred_rf))
precision recall f1-score support
0 0.89 0.93 0.91 370
1 0.92 0.89 0.91 370
accuracy 0.91 740
macro avg 0.91 0.91 0.91 740
weighted avg 0.91 0.91 0.91 740
print(confusion_matrix(y_test1,pred_rf))
[[343 27] [ 41 329]]
roc_auc_score(y_test1,pred_rf)
0.908108108108108
pred_train2 = rf.predict(X_train1)
print(confusion_matrix(y_train1, pred_train2))
[[863 0] [ 0 863]]
c = confusion_matrix(y_train1, pred_train2)
TN = c[0,0]
FP = c[0,1]
FN = c[1,0]
TP = c[1,1]
precision4 = TP/float(TP+FP)
Recall4 = TP/float(TP+FN)
print("Precision:", precision4)
print("Recall:",Recall4)
print("roc_auc_Score:",roc_auc_score(y_train1, pred_train2) )
print("f1_score:",f1_score(y_train1, pred_train2) )
print("accuracy_score:",accuracy_score(y_train1, pred_train2))
Precision: 1.0 Recall: 1.0 roc_auc_Score: 1.0 f1_score: 1.0 accuracy_score: 1.0
print(confusion_matrix(y_test1,pred_rf))
[[343 27] [ 41 329]]
conf_mat = confusion_matrix(y_test1, pred_rf)
sns.heatmap(conf_mat, annot=True, fmt='d', cbar=False)
plt.xlabel('Predicted Values')
plt.ylabel('Actual Values')
plt.title('Actual vs. Predicted Confusion Matrix')
b, t = plt.ylim()
b += 0.5
t -= 0.5
plt.ylim(b, t)
plt.show()
c1 = confusion_matrix(y_test1,pred_rf)
TN = c1[0,0]
FP = c1[0,1]
FN = c1[1,0]
TP = c1[1,1]
precision5 = TP/float(TP+FP)
Recall5 = TP/float(TP+FN)
print("Precision:", precision5)
print("Recall:",Recall5)
print("roc_auc_Score:",roc_auc_score(y_test1,pred_rf) )
print("f1_score:",f1_score(y_test1,pred_rf) )
print("accuracy_score:",accuracy_score(y_test1,pred_rf))
Precision: 0.9241573033707865 Recall: 0.8891891891891892 roc_auc_Score: 0.908108108108108 f1_score: 0.90633608815427 accuracy_score: 0.9081081081081082
from sklearn import model_selection
kf=model_selection.KFold(n_splits=5,shuffle=True,random_state=0)
values=model_selection.cross_val_score(rf,X_res, y_res,cv=kf,scoring='roc_auc')
print(values)
b_e=1-np.mean(values)
v_e=np.std(values,ddof=1)
print('Bias_error:',b_e)
print('VE:',v_e)
print("Accuracy: %0.2f (+/- %0.2f)" % (values.mean(), values.std() * 2))
[0.96675061 0.98966274 0.97331947 0.98065212 0.97243114] Bias_error: 0.02343678350322964 VE: 0.008834587212657315 Accuracy: 0.98 (+/- 0.02)
ada=AdaBoostClassifier()
ada.fit(X_train1,y_train1)
AdaBoostClassifier()In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
AdaBoostClassifier()
ada.score(X_train1,y_train1)
0.9009269988412515
pred_ada=ada.predict(X_test1)
ada.predict_proba(X_test1)
array([[0.48364311, 0.51635689],
[0.47140314, 0.52859686],
[0.49072158, 0.50927842],
...,
[0.4995125 , 0.5004875 ],
[0.5020442 , 0.4979558 ],
[0.50310049, 0.49689951]])
print(classification_report(y_test1,pred_ada))
precision recall f1-score support
0 0.88 0.88 0.88 370
1 0.88 0.88 0.88 370
accuracy 0.88 740
macro avg 0.88 0.88 0.88 740
weighted avg 0.88 0.88 0.88 740
print(confusion_matrix(y_test1,pred_ada))
[[324 46] [ 44 326]]
roc_auc_score(y_test1,pred_ada)
0.8783783783783783
pred_train1 = ada.predict(X_train1)
print(confusion_matrix(y_train1, pred_train1))
[[790 73] [ 98 765]]
d = confusion_matrix(y_train1, pred_train2)
TN = d[0,0]
FP = d[0,1]
FN = d[1,0]
TP = d[1,1]
precision6 = TP/float(TP+FP)
Recall6 = TP/float(TP+FN)
print("Precision:", precision6)
print("Recall:",Recall6)
print("roc_auc_Score:",roc_auc_score(y_train1, pred_train1) )
print("f1_score:",f1_score(y_train1, pred_train1) )
print("accuracy_score:",accuracy_score(y_train1, pred_train1))
Precision: 1.0 Recall: 1.0 roc_auc_Score: 0.9009269988412514 f1_score: 0.8994708994708994 accuracy_score: 0.9009269988412515
print(confusion_matrix(y_test1,pred_ada))
[[324 46] [ 44 326]]
d1 = confusion_matrix(y_test1,pred_ada)
TN = d1[0,0]
FP = d1[0,1]
FN = d1[1,0]
TP = d1[1,1]
precision7 = TP/float(TP+FP)
Recall7 = TP/float(TP+FN)
print("Precision:", precision7)
print("Recall:",Recall7)
print("roc_auc_Score:",roc_auc_score(y_test1,pred_ada) )
print("f1_score:",f1_score(y_test1,pred_ada) )
print("accuracy_score:",accuracy_score(y_test1,pred_ada))
Precision: 0.8763440860215054 Recall: 0.8810810810810811 roc_auc_Score: 0.8783783783783783 f1_score: 0.8787061994609164 accuracy_score: 0.8783783783783784
from sklearn import model_selection
kf=model_selection.KFold(n_splits=5,shuffle=True,random_state=0)
values=model_selection.cross_val_score(ada,X_res, y_res,cv=kf,scoring='roc_auc')
print(values)
b_e=1-np.mean(values)
v_e=np.std(values,ddof=1)
print('Bias_error:',b_e)
print('VE:',v_e)
print("Accuracy: %0.2f (+/- %0.2f)" % (values.mean(), values.std() * 2))
[0.93047257 0.97143847 0.93449493 0.95253144 0.93496619] Bias_error: 0.0552192798461697 VE: 0.017157760065195562 Accuracy: 0.94 (+/- 0.03)
bgc=BaggingClassifier()
bgc.fit(X_train1,y_train1)
BaggingClassifier()In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
BaggingClassifier()
bgc.score(X_train1,y_train1)
0.9913093858632677
pred_bgc=bgc.predict(X_test1)
bgc.predict_proba(X_test1)
array([[0. , 1. ],
[0.1, 0.9],
[0.4, 0.6],
...,
[0.2, 0.8],
[0.8, 0.2],
[0.8, 0.2]])
print(classification_report(y_test1,pred_bgc))
precision recall f1-score support
0 0.86 0.91 0.89 370
1 0.91 0.86 0.88 370
accuracy 0.89 740
macro avg 0.89 0.89 0.89 740
weighted avg 0.89 0.89 0.89 740
print(confusion_matrix(y_test1,pred_bgc))
[[338 32] [ 53 317]]
roc_auc_score(y_test1,pred_bgc)
0.8851351351351351
pred_train4 = bgc.predict(X_train1)
print(confusion_matrix(y_train1, pred_train4))
[[860 3] [ 12 851]]
f = confusion_matrix(y_train1, pred_train4)
TN = f[0,0]
FP = f[0,1]
FN = f[1,0]
TP = f[1,1]
precision10 = TP/float(TP+FP)
Recall10 = TP/float(TP+FN)
print("Precision:", precision10)
print("Recall:",Recall10)
print("roc_auc_Score:",roc_auc_score(y_train1, pred_train4) )
print("f1_score:",f1_score(y_train1, pred_train4) )
print("accuracy_score:",accuracy_score(y_train1, pred_train4))
Precision: 0.9964871194379391 Recall: 0.9860950173812283 roc_auc_Score: 0.9913093858632677 f1_score: 0.9912638322655796 accuracy_score: 0.9913093858632677
print(confusion_matrix(y_test1,pred_bgc))
[[338 32] [ 53 317]]
f1 = confusion_matrix(y_test1,pred_bgc)
TN = f1[0,0]
FP = f1[0,1]
FN = f1[1,0]
TP = f1[1,1]
precision11 = TP/float(TP+FP)
Recall11 = TP/float(TP+FN)
print("Precision:", precision11)
print("Recall:",Recall11)
print("roc_auc_Score:",roc_auc_score(y_test1,pred_bgc) )
print("f1_score:",f1_score(y_test1,pred_bgc) )
print("accuracy_score:",accuracy_score(y_test1,pred_bgc))
Precision: 0.9083094555873925 Recall: 0.8567567567567568 roc_auc_Score: 0.8851351351351351 f1_score: 0.881780250347705 accuracy_score: 0.8851351351351351
from sklearn import model_selection
kf=model_selection.KFold(n_splits=5,shuffle=True,random_state=0)
values=model_selection.cross_val_score(bgc,X_res, y_res,cv=kf,scoring='roc_auc')
print(values)
b_e=1-np.mean(values)
v_e=np.std(values,ddof=1)
print('Bias_error:',b_e)
print('VE:',v_e)
print("Accuracy: %0.2f (+/- %0.2f)" % (values.mean(), values.std() * 2))
[0.92649746 0.97068772 0.95912946 0.95456416 0.92827808] Bias_error: 0.05216862496726815 VE: 0.019575858945259397 Accuracy: 0.95 (+/- 0.04)
import xgboost as xgb
xgbc=xgb.XGBClassifier()
xgbc.fit(X_train1, y_train1)
XGBClassifier(base_score=None, booster=None, callbacks=None,
colsample_bylevel=None, colsample_bynode=None,
colsample_bytree=None, early_stopping_rounds=None,
enable_categorical=False, eval_metric=None, feature_types=None,
gamma=None, gpu_id=None, grow_policy=None, importance_type=None,
interaction_constraints=None, learning_rate=None, max_bin=None,
max_cat_threshold=None, max_cat_to_onehot=None,
max_delta_step=None, max_depth=None, max_leaves=None,
min_child_weight=None, missing=nan, monotone_constraints=None,
n_estimators=100, n_jobs=None, num_parallel_tree=None,
predictor=None, random_state=None, ...)In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. XGBClassifier(base_score=None, booster=None, callbacks=None,
colsample_bylevel=None, colsample_bynode=None,
colsample_bytree=None, early_stopping_rounds=None,
enable_categorical=False, eval_metric=None, feature_types=None,
gamma=None, gpu_id=None, grow_policy=None, importance_type=None,
interaction_constraints=None, learning_rate=None, max_bin=None,
max_cat_threshold=None, max_cat_to_onehot=None,
max_delta_step=None, max_depth=None, max_leaves=None,
min_child_weight=None, missing=nan, monotone_constraints=None,
n_estimators=100, n_jobs=None, num_parallel_tree=None,
predictor=None, random_state=None, ...)xgbc.score(X_train1, y_train1)
1.0
pred_xgbc = xgbc.predict(X_test1)
print(classification_report(y_test1, pred_xgbc))
precision recall f1-score support
0 0.89 0.93 0.91 370
1 0.93 0.89 0.91 370
accuracy 0.91 740
macro avg 0.91 0.91 0.91 740
weighted avg 0.91 0.91 0.91 740
print(confusion_matrix(y_test1, pred_xgbc))
[[344 26] [ 42 328]]
roc_auc_score(y_test1, pred_xgbc)
0.9081081081081082
pred_train5 = xgbc.predict(X_train1)
print(confusion_matrix(y_train1, pred_train5))
[[863 0] [ 0 863]]
g = confusion_matrix(y_train1, pred_train5)
TN = g[0,0]
FP = g[0,1]
FN = g[1,0]
TP = g[1,1]
precision12 = TP/float(TP+FP)
Recall12 = TP/float(TP+FN)
print("Precision:", precision12)
print("Recall:",Recall12)
print("roc_auc_Score:",roc_auc_score(y_train1, pred_train5) )
print("f1_score:",f1_score(y_train1, pred_train5) )
print("accuracy_score:",accuracy_score(y_train1, pred_train5))
Precision: 1.0 Recall: 1.0 roc_auc_Score: 1.0 f1_score: 1.0 accuracy_score: 1.0
print(confusion_matrix(y_test1,pred_xgbc))
[[344 26] [ 42 328]]
g1 = confusion_matrix(y_test1,pred_xgbc)
TN = g1[0,0]
FP = g1[0,1]
FN = g1[1,0]
TP = g1[1,1]
precision13 = TP/float(TP+FP)
Recall13 = TP/float(TP+FN)
print("Precision:", precision13)
print("Recall:",Recall13)
print("roc_auc_Score:",roc_auc_score(y_test1,pred_xgbc) )
print("f1_score:",f1_score(y_test1,pred_xgbc) )
print("accuracy_score:",accuracy_score(y_test1,pred_xgbc))
Precision: 0.9265536723163842 Recall: 0.8864864864864865 roc_auc_Score: 0.9081081081081082 f1_score: 0.9060773480662982 accuracy_score: 0.9081081081081082
conf_mat = confusion_matrix(y_test1,pred_xgbc)
sns.heatmap(conf_mat, annot=True, fmt='d', cbar=False)
plt.xlabel('Predicted Values')
plt.ylabel('Actual Values')
plt.title('Actual vs. Predicted Confusion Matrix')
b, t = plt.ylim()
b += 0.5
t -= 0.5
plt.ylim(b, t)
plt.show()
from sklearn import model_selection
kf=model_selection.KFold(n_splits=5,shuffle=True,random_state=0)
values=model_selection.cross_val_score(xgbc,X_res, y_res,cv=kf,scoring='roc_auc')
print(values)
b_e=1-np.mean(values)
v_e=np.std(values,ddof=1)
print('Bias_error:',b_e)
print('VE:',v_e)
print("Accuracy: %0.2f (+/- %0.2f)" % (values.mean(), values.std() * 2))
[0.95228219 0.98554599 0.96784147 0.97096583 0.95622629] Bias_error: 0.033427646727233196 VE: 0.013154483480689972 Accuracy: 0.97 (+/- 0.02)
from sklearn.model_selection import GridSearchCV
from xgboost import XGBClassifier
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
# Define the parameter grid for grid search
param_grid = {
'n_estimators': [50, 100, 150],
'max_depth': [5, 10, 15],
'learning_rate': [0.01, 0.1, 0.2],
'subsample': [0.5, 0.7, 1.0],
'colsample_bytree': [0.5, 0.7, 1.0]
}
# Create the XGBClassifier object
xgb_classifier = XGBClassifier(use_label_encoder=False)
# Create the GridSearchCV object
grid_search = GridSearchCV(xgb_classifier, param_grid, cv=5, scoring='accuracy')
# Fit the data to perform grid search
grid_search.fit(X_train, y_train)
GridSearchCV(cv=5,
estimator=XGBClassifier(base_score=None, booster=None,
callbacks=None, colsample_bylevel=None,
colsample_bynode=None,
colsample_bytree=None,
early_stopping_rounds=None,
enable_categorical=False, eval_metric=None,
feature_types=None, gamma=None,
gpu_id=None, grow_policy=None,
importance_type=None,
interaction_constraints=None,
learning_rate=None,...
max_delta_step=None, max_depth=None,
max_leaves=None, min_child_weight=None,
missing=nan, monotone_constraints=None,
n_estimators=100, n_jobs=None,
num_parallel_tree=None, predictor=None,
random_state=None, ...),
param_grid={'colsample_bytree': [0.5, 0.7, 1.0],
'learning_rate': [0.01, 0.1, 0.2],
'max_depth': [5, 10, 15],
'n_estimators': [50, 100, 150],
'subsample': [0.5, 0.7, 1.0]},
scoring='accuracy')In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. GridSearchCV(cv=5,
estimator=XGBClassifier(base_score=None, booster=None,
callbacks=None, colsample_bylevel=None,
colsample_bynode=None,
colsample_bytree=None,
early_stopping_rounds=None,
enable_categorical=False, eval_metric=None,
feature_types=None, gamma=None,
gpu_id=None, grow_policy=None,
importance_type=None,
interaction_constraints=None,
learning_rate=None,...
max_delta_step=None, max_depth=None,
max_leaves=None, min_child_weight=None,
missing=nan, monotone_constraints=None,
n_estimators=100, n_jobs=None,
num_parallel_tree=None, predictor=None,
random_state=None, ...),
param_grid={'colsample_bytree': [0.5, 0.7, 1.0],
'learning_rate': [0.01, 0.1, 0.2],
'max_depth': [5, 10, 15],
'n_estimators': [50, 100, 150],
'subsample': [0.5, 0.7, 1.0]},
scoring='accuracy')XGBClassifier(base_score=None, booster=None, callbacks=None,
colsample_bylevel=None, colsample_bynode=None,
colsample_bytree=None, early_stopping_rounds=None,
enable_categorical=False, eval_metric=None, feature_types=None,
gamma=None, gpu_id=None, grow_policy=None, importance_type=None,
interaction_constraints=None, learning_rate=None, max_bin=None,
max_cat_threshold=None, max_cat_to_onehot=None,
max_delta_step=None, max_depth=None, max_leaves=None,
min_child_weight=None, missing=nan, monotone_constraints=None,
n_estimators=100, n_jobs=None, num_parallel_tree=None,
predictor=None, random_state=None, ...)XGBClassifier(base_score=None, booster=None, callbacks=None,
colsample_bylevel=None, colsample_bynode=None,
colsample_bytree=None, early_stopping_rounds=None,
enable_categorical=False, eval_metric=None, feature_types=None,
gamma=None, gpu_id=None, grow_policy=None, importance_type=None,
interaction_constraints=None, learning_rate=None, max_bin=None,
max_cat_threshold=None, max_cat_to_onehot=None,
max_delta_step=None, max_depth=None, max_leaves=None,
min_child_weight=None, missing=nan, monotone_constraints=None,
n_estimators=100, n_jobs=None, num_parallel_tree=None,
predictor=None, random_state=None, ...)# Get the best parameters and best score from grid search
best_params = grid_search.best_params_
best_score = grid_search.best_score_
# Use the best parameters to create the final model
final_model = XGBClassifier(**best_params)
# Fit the final model on the training data
final_model.fit(X_train, y_train)
XGBClassifier(base_score=None, booster=None, callbacks=None,
colsample_bylevel=None, colsample_bynode=None,
colsample_bytree=0.7, early_stopping_rounds=None,
enable_categorical=False, eval_metric=None, feature_types=None,
gamma=None, gpu_id=None, grow_policy=None, importance_type=None,
interaction_constraints=None, learning_rate=0.01, max_bin=None,
max_cat_threshold=None, max_cat_to_onehot=None,
max_delta_step=None, max_depth=10, max_leaves=None,
min_child_weight=None, missing=nan, monotone_constraints=None,
n_estimators=150, n_jobs=None, num_parallel_tree=None,
predictor=None, random_state=None, ...)In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. XGBClassifier(base_score=None, booster=None, callbacks=None,
colsample_bylevel=None, colsample_bynode=None,
colsample_bytree=0.7, early_stopping_rounds=None,
enable_categorical=False, eval_metric=None, feature_types=None,
gamma=None, gpu_id=None, grow_policy=None, importance_type=None,
interaction_constraints=None, learning_rate=0.01, max_bin=None,
max_cat_threshold=None, max_cat_to_onehot=None,
max_delta_step=None, max_depth=10, max_leaves=None,
min_child_weight=None, missing=nan, monotone_constraints=None,
n_estimators=150, n_jobs=None, num_parallel_tree=None,
predictor=None, random_state=None, ...)# Make predictions on the train set
y_pred1 = final_model.predict(X_train)
# Calculate accuracy on the test set
accuracy = accuracy_score(y_train, y_pred1)
print("Accuracy:", accuracy)
Accuracy: 0.9217687074829932
# Make predictions on the test set
y_pred = final_model.predict(X_test)
# Calculate accuracy on the test set
accuracy = accuracy_score(y_test, y_pred)
print("Accuracy:", accuracy)
Accuracy: 0.8707482993197279
xgbc = XGBClassifier(use_label_encoder=False, eval_metric='logloss')
# Define the KFold cross-validation
kf = model_selection.KFold(n_splits=5, shuffle=True, random_state=0)
# Create the GridSearchCV object
grid_search = GridSearchCV(estimator=xgbc, param_grid=param_grid, scoring='roc_auc', cv=kf, verbose=2)
# Fit GridSearchCV
grid_search.fit(X_res, y_res)
Fitting 5 folds for each of 243 candidates, totalling 1215 fits [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.3s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.3s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=0.5, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=0.5, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.5s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.3s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.3s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=0.7, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.3s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.3s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=0.7, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.4s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.01, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.1, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.3s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=0.7; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=50, subsample=1.0; total time= 0.0s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=5, n_estimators=150, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=10, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=50, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.5; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=0.7; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.1s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=100, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.5; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=0.7; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s [CV] END colsample_bytree=1.0, learning_rate=0.2, max_depth=15, n_estimators=150, subsample=1.0; total time= 0.2s
GridSearchCV(cv=KFold(n_splits=5, random_state=0, shuffle=True),
estimator=XGBClassifier(base_score=None, booster=None,
callbacks=None, colsample_bylevel=None,
colsample_bynode=None,
colsample_bytree=None,
early_stopping_rounds=None,
enable_categorical=False,
eval_metric='logloss', feature_types=None,
gamma=None, gpu_id=None, grow_policy=None,
importance_type=None...
max_delta_step=None, max_depth=None,
max_leaves=None, min_child_weight=None,
missing=nan, monotone_constraints=None,
n_estimators=100, n_jobs=None,
num_parallel_tree=None, predictor=None,
random_state=None, ...),
param_grid={'colsample_bytree': [0.5, 0.7, 1.0],
'learning_rate': [0.01, 0.1, 0.2],
'max_depth': [5, 10, 15],
'n_estimators': [50, 100, 150],
'subsample': [0.5, 0.7, 1.0]},
scoring='roc_auc', verbose=2)In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. GridSearchCV(cv=KFold(n_splits=5, random_state=0, shuffle=True),
estimator=XGBClassifier(base_score=None, booster=None,
callbacks=None, colsample_bylevel=None,
colsample_bynode=None,
colsample_bytree=None,
early_stopping_rounds=None,
enable_categorical=False,
eval_metric='logloss', feature_types=None,
gamma=None, gpu_id=None, grow_policy=None,
importance_type=None...
max_delta_step=None, max_depth=None,
max_leaves=None, min_child_weight=None,
missing=nan, monotone_constraints=None,
n_estimators=100, n_jobs=None,
num_parallel_tree=None, predictor=None,
random_state=None, ...),
param_grid={'colsample_bytree': [0.5, 0.7, 1.0],
'learning_rate': [0.01, 0.1, 0.2],
'max_depth': [5, 10, 15],
'n_estimators': [50, 100, 150],
'subsample': [0.5, 0.7, 1.0]},
scoring='roc_auc', verbose=2)XGBClassifier(base_score=None, booster=None, callbacks=None,
colsample_bylevel=None, colsample_bynode=None,
colsample_bytree=None, early_stopping_rounds=None,
enable_categorical=False, eval_metric='logloss',
feature_types=None, gamma=None, gpu_id=None, grow_policy=None,
importance_type=None, interaction_constraints=None,
learning_rate=None, max_bin=None, max_cat_threshold=None,
max_cat_to_onehot=None, max_delta_step=None, max_depth=None,
max_leaves=None, min_child_weight=None, missing=nan,
monotone_constraints=None, n_estimators=100, n_jobs=None,
num_parallel_tree=None, predictor=None, random_state=None, ...)XGBClassifier(base_score=None, booster=None, callbacks=None,
colsample_bylevel=None, colsample_bynode=None,
colsample_bytree=None, early_stopping_rounds=None,
enable_categorical=False, eval_metric='logloss',
feature_types=None, gamma=None, gpu_id=None, grow_policy=None,
importance_type=None, interaction_constraints=None,
learning_rate=None, max_bin=None, max_cat_threshold=None,
max_cat_to_onehot=None, max_delta_step=None, max_depth=None,
max_leaves=None, min_child_weight=None, missing=nan,
monotone_constraints=None, n_estimators=100, n_jobs=None,
num_parallel_tree=None, predictor=None, random_state=None, ...)# Get the best score and best parameters
print("Best Score:", grid_search.best_score_)
print("Best Parameters:", grid_search.best_params_)
Best Score: 0.9741992232630985
Best Parameters: {'colsample_bytree': 1.0, 'learning_rate': 0.2, 'max_depth': 15, 'n_estimators': 100, 'subsample': 0.5}
# Perform cross-validation with the best parameters
values = model_selection.cross_val_score(XGBClassifier(**grid_search.best_params_), X_res, y_res, cv=kf, scoring='roc_auc')
# Print the individual cross-validation scores
print(values)
[0.96390302 0.99001749 0.97150447 0.97636447 0.96920666]
# Calculate and print the bias error and variance error
b_e = 1 - np.mean(values)
v_e = np.std(values, ddof=1)
print('Bias_error:', b_e)
print('VE:', v_e)
Bias_error: 0.02580077673690151 VE: 0.009913438821941059
# Print the average score and 95% confidence interval
print("Accuracy: %0.2f (+/- %0.2f)" % (values.mean(), values.std() * 2))
Accuracy: 0.97 (+/- 0.02)
results=[[0.79036426, 0.81985282, 0.77215127, 0.79231681, 0.77022926],
[0.81265946, 0.88066363, 0.83406758, 0.85410494, 0.82248062],
[0.96675061, 0.98966274, 0.97331947, 0.98065212, 0.97243114],
[0.93047257, 0.97143847, 0.93449493, 0.95253144, 0.93496619],
[0.92778134, 0.97894598, 0.96023496, 0.95488511, 0.93700313],
[0.95228219, 0.98554599, 0.96784147, 0.97096583, 0.95622629]]
names=['Logistic_Regression','Decision Tree','Random Forest','Ada-boost','Bagging','XGBC']
fig = plt.figure(figsize=(12,8))
fig.suptitle('Algorithm Comparison')
ax = fig.add_subplot(111)
plt.boxplot(results)
plt.grid(True)
ax.set_xticklabels(names)
plt.show()
From the box plot comparison diagram, we can see that Random Forest and XGBC are better models amongst all of the models. But when we observe train and test scores of Random Forest and XGBC, we can see that XGBC model is performing better on both train and test data. So we are finalizing best model as XGBC model.